In [1]:
import matplotlib.pyplot as plt
import numpy as np
import PIL
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from tensorflow.keras.models import Sequential
In [2]:
import pathlib
data_dir = 'C:\\Users\\starl\\Downloads\\class\\data2\\'
data_dir = pathlib.Path(data_dir).with_suffix('')
In [3]:
data_dir
Out[3]:
WindowsPath('C:/Users/starl/Downloads/class/data2')
In [4]:
image_count = len(list(data_dir.glob('*/*.png')))
print(image_count)
# 데이터 사본의 총 개수
234
In [8]:
ventis = list(data_dir.glob('Venti/*'))
PIL.Image.open(str(ventis[0]))
Out[8]:
In [10]:
kokomi = list(data_dir.glob('Sangonomiya Kokomi/*'))
PIL.Image.open(str(kokomi[0]))
Out[10]:
In [11]:
# 데이터셋 생성 ( 매개변수 지정 )
batch_size = 32
img_height = 180
img_width = 180
In [12]:
train_ds = tf.keras.utils.image_dataset_from_directory(
data_dir,
validation_split=0.2,
subset="training",
seed=123,
image_size=(img_height, img_width),
batch_size=batch_size)
Found 234 files belonging to 60 classes. Using 188 files for training.
In [13]:
val_ds = tf.keras.utils.image_dataset_from_directory(
data_dir,
validation_split=0.2,
subset="validation",
seed=123,
image_size=(img_height, img_width),
batch_size=batch_size)
Found 234 files belonging to 60 classes. Using 46 files for validation.
In [14]:
class_names = train_ds.class_names
print(class_names)
['Aether', 'Albedo', 'Aloy', 'Amber', 'Arataki Itto', 'Barbara', 'Beidou', 'Bennett', 'Candace', 'Chongyun', 'Collei', 'Cyno', 'Diluc', 'Diona', 'Dori', 'Eula', 'Fischl', 'Ganyu', 'Gorou', 'Hu Tao', 'Jean', 'Kaedehara Kazuha', 'Kaeya', 'Kamisato Ayaka', 'Kamisato Ayato', 'Keqing', 'Klee', 'Kujou Sara', 'Kuki Shinobu', 'Layla', 'Lisa', 'Lumine', 'Mona', 'Nahida', 'Nilou', 'Ningguang', 'Noelle', 'Qiqi', 'Raiden Shogun', 'Razor', 'Rosaria', 'Sangonomiya Kokomi', 'Sayu', 'Shenhe', 'Shikanoin Heizou', 'Sucrose', 'Tartaglia', 'Thoma', 'Tighnari', 'Venti', 'Xiangling', 'Xiao', 'Xingqiu', 'Xinyan', 'Yae Miko', 'Yanfei', 'Yelan', 'Yoimiya', 'Yun Jin', 'Zhongli']
In [15]:
import matplotlib.pyplot as plt
plt.figure(figsize=(10, 10))
for images, labels in train_ds.take(1):
for i in range(9):
ax = plt.subplot(5, 10, i + 1)
plt.imshow(images[i].numpy().astype("uint8"))
plt.title(class_names[labels[i]])
plt.axis("off")
# 훈련 데이터세트 출력
In [16]:
for image_batch, labels_batch in train_ds:
print(image_batch.shape)
print(labels_batch.shape)
break
# image_batch는 (32, 180, 180, 3) 형상의 텐서
(32, 180, 180, 3) (32,)
In [17]:
# 데이터세트 구성
AUTOTUNE = tf.data.AUTOTUNE
train_ds = train_ds.cache().shuffle(1000).prefetch(buffer_size=AUTOTUNE)
val_ds = val_ds.cache().prefetch(buffer_size=AUTOTUNE)
In [18]:
# tf.keras.layers.Rescaling을 사용하여 값을 [0, 1] 범위로 표준화
normalization_layer = layers.Rescaling(1./255)
In [19]:
normalized_ds = train_ds.map(lambda x, y: (normalization_layer(x), y))
image_batch, labels_batch = next(iter(normalized_ds))
first_image = image_batch[0]
print(np.min(first_image), np.max(first_image))
# Dataset.map을 호출하여 데이터세트에 이를 적용
0.0 1.0
In [20]:
# 모델 생성
num_classes = len(class_names)
model = Sequential([
layers.Rescaling(1./255, input_shape=(img_height, img_width, 3)),
layers.Conv2D(16, 3, padding='same', activation='relu'),
layers.MaxPooling2D(),
layers.Conv2D(32, 3, padding='same', activation='relu'),
layers.MaxPooling2D(),
layers.Conv2D(64, 3, padding='same', activation='relu'),
layers.MaxPooling2D(),
layers.Flatten(),
layers.Dense(128, activation='relu'),
layers.Dense(num_classes)
])
# relu 활성화 함수를 통해서 레이어 생성
In [21]:
model.compile(optimizer='adam',
loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
metrics=['accuracy'])
# 아담 옵티마이저와 tf.keras.losses.SparseCategoricalCrossentropy 손실 함수를 선택
In [22]:
model.summary()
# 네트워크의 모든 레이어 호출( df['total_bill'].info() 같은 메서드 )
Model: "sequential"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
rescaling_1 (Rescaling) (None, 180, 180, 3) 0
conv2d (Conv2D) (None, 180, 180, 16) 448
max_pooling2d (MaxPooling2 (None, 90, 90, 16) 0
D)
conv2d_1 (Conv2D) (None, 90, 90, 32) 4640
max_pooling2d_1 (MaxPoolin (None, 45, 45, 32) 0
g2D)
conv2d_2 (Conv2D) (None, 45, 45, 64) 18496
max_pooling2d_2 (MaxPoolin (None, 22, 22, 64) 0
g2D)
flatten (Flatten) (None, 30976) 0
dense (Dense) (None, 128) 3965056
dense_1 (Dense) (None, 60) 7740
=================================================================
Total params: 3996380 (15.24 MB)
Trainable params: 3996380 (15.24 MB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
In [23]:
# 100에포크 동안 모델 학습
epochs=100
history = model.fit(
train_ds,
validation_data=val_ds,
epochs=epochs
)
Epoch 1/100 6/6 [==============================] - 9s 1s/step - loss: 4.3825 - accuracy: 0.0106 - val_loss: 4.1407 - val_accuracy: 0.0000e+00 Epoch 2/100 6/6 [==============================] - 6s 1s/step - loss: 4.0310 - accuracy: 0.0638 - val_loss: 4.1804 - val_accuracy: 0.0217 Epoch 3/100 6/6 [==============================] - 5s 788ms/step - loss: 3.8202 - accuracy: 0.1702 - val_loss: 4.2850 - val_accuracy: 0.0000e+00 Epoch 4/100 6/6 [==============================] - 5s 861ms/step - loss: 3.4340 - accuracy: 0.2287 - val_loss: 4.4371 - val_accuracy: 0.0217 Epoch 5/100 6/6 [==============================] - 7s 1s/step - loss: 2.7521 - accuracy: 0.4628 - val_loss: 4.9281 - val_accuracy: 0.0000e+00 Epoch 6/100 6/6 [==============================] - 7s 1s/step - loss: 1.6472 - accuracy: 0.6543 - val_loss: 5.5912 - val_accuracy: 0.0217 Epoch 7/100 6/6 [==============================] - 5s 946ms/step - loss: 0.8228 - accuracy: 0.8723 - val_loss: 6.2738 - val_accuracy: 0.0217 Epoch 8/100 6/6 [==============================] - 6s 1s/step - loss: 0.4674 - accuracy: 0.9043 - val_loss: 7.1846 - val_accuracy: 0.0435 Epoch 9/100 6/6 [==============================] - 7s 1s/step - loss: 0.1745 - accuracy: 0.9787 - val_loss: 8.2793 - val_accuracy: 0.0217 Epoch 10/100 6/6 [==============================] - 7s 1s/step - loss: 0.0827 - accuracy: 0.9840 - val_loss: 8.7844 - val_accuracy: 0.0217 Epoch 11/100 6/6 [==============================] - 6s 991ms/step - loss: 0.0250 - accuracy: 0.9947 - val_loss: 9.3569 - val_accuracy: 0.0217 Epoch 12/100 6/6 [==============================] - 6s 1s/step - loss: 0.0029 - accuracy: 1.0000 - val_loss: 9.7865 - val_accuracy: 0.0652 Epoch 13/100 6/6 [==============================] - 4s 650ms/step - loss: 0.0046 - accuracy: 1.0000 - val_loss: 10.1966 - val_accuracy: 0.0652 Epoch 14/100 6/6 [==============================] - 4s 645ms/step - loss: 0.0026 - accuracy: 1.0000 - val_loss: 10.1790 - val_accuracy: 0.0652 Epoch 15/100 6/6 [==============================] - 5s 800ms/step - loss: 0.0015 - accuracy: 1.0000 - val_loss: 10.1164 - val_accuracy: 0.0652 Epoch 16/100 6/6 [==============================] - 4s 700ms/step - loss: 8.3651e-04 - accuracy: 1.0000 - val_loss: 10.1437 - val_accuracy: 0.0652 Epoch 17/100 6/6 [==============================] - 5s 894ms/step - loss: 5.4617e-04 - accuracy: 1.0000 - val_loss: 10.1915 - val_accuracy: 0.0652 Epoch 18/100 6/6 [==============================] - 4s 736ms/step - loss: 3.7098e-04 - accuracy: 1.0000 - val_loss: 10.3012 - val_accuracy: 0.0652 Epoch 19/100 6/6 [==============================] - 5s 940ms/step - loss: 2.7360e-04 - accuracy: 1.0000 - val_loss: 10.3904 - val_accuracy: 0.0652 Epoch 20/100 6/6 [==============================] - 5s 872ms/step - loss: 2.1921e-04 - accuracy: 1.0000 - val_loss: 10.4642 - val_accuracy: 0.0652 Epoch 21/100 6/6 [==============================] - 5s 736ms/step - loss: 1.8513e-04 - accuracy: 1.0000 - val_loss: 10.5173 - val_accuracy: 0.0652 Epoch 22/100 6/6 [==============================] - 5s 828ms/step - loss: 1.6289e-04 - accuracy: 1.0000 - val_loss: 10.5631 - val_accuracy: 0.0652 Epoch 23/100 6/6 [==============================] - 6s 1s/step - loss: 1.4345e-04 - accuracy: 1.0000 - val_loss: 10.5925 - val_accuracy: 0.0652 Epoch 24/100 6/6 [==============================] - 6s 967ms/step - loss: 1.2964e-04 - accuracy: 1.0000 - val_loss: 10.6117 - val_accuracy: 0.0652 Epoch 25/100 6/6 [==============================] - 4s 765ms/step - loss: 1.1864e-04 - accuracy: 1.0000 - val_loss: 10.6293 - val_accuracy: 0.0652 Epoch 26/100 6/6 [==============================] - 5s 901ms/step - loss: 1.0985e-04 - accuracy: 1.0000 - val_loss: 10.6466 - val_accuracy: 0.0652 Epoch 27/100 6/6 [==============================] - 5s 869ms/step - loss: 1.0258e-04 - accuracy: 1.0000 - val_loss: 10.6617 - val_accuracy: 0.0652 Epoch 28/100 6/6 [==============================] - 4s 713ms/step - loss: 9.7138e-05 - accuracy: 1.0000 - val_loss: 10.6804 - val_accuracy: 0.0652 Epoch 29/100 6/6 [==============================] - 5s 826ms/step - loss: 8.9901e-05 - accuracy: 1.0000 - val_loss: 10.6948 - val_accuracy: 0.0652 Epoch 30/100 6/6 [==============================] - 7s 1s/step - loss: 8.5391e-05 - accuracy: 1.0000 - val_loss: 10.7104 - val_accuracy: 0.0652 Epoch 31/100 6/6 [==============================] - 5s 775ms/step - loss: 8.0914e-05 - accuracy: 1.0000 - val_loss: 10.7297 - val_accuracy: 0.0652 Epoch 32/100 6/6 [==============================] - 6s 962ms/step - loss: 7.6429e-05 - accuracy: 1.0000 - val_loss: 10.7433 - val_accuracy: 0.0652 Epoch 33/100 6/6 [==============================] - 4s 710ms/step - loss: 7.2769e-05 - accuracy: 1.0000 - val_loss: 10.7580 - val_accuracy: 0.0652 Epoch 34/100 6/6 [==============================] - 4s 743ms/step - loss: 6.9906e-05 - accuracy: 1.0000 - val_loss: 10.7763 - val_accuracy: 0.0652 Epoch 35/100 6/6 [==============================] - 4s 747ms/step - loss: 6.6448e-05 - accuracy: 1.0000 - val_loss: 10.7906 - val_accuracy: 0.0652 Epoch 36/100 6/6 [==============================] - 5s 852ms/step - loss: 6.3763e-05 - accuracy: 1.0000 - val_loss: 10.8084 - val_accuracy: 0.0652 Epoch 37/100 6/6 [==============================] - 5s 811ms/step - loss: 6.0624e-05 - accuracy: 1.0000 - val_loss: 10.8219 - val_accuracy: 0.0652 Epoch 38/100 6/6 [==============================] - 6s 1s/step - loss: 5.8421e-05 - accuracy: 1.0000 - val_loss: 10.8361 - val_accuracy: 0.0652 Epoch 39/100 6/6 [==============================] - 4s 721ms/step - loss: 5.6304e-05 - accuracy: 1.0000 - val_loss: 10.8518 - val_accuracy: 0.0652 Epoch 40/100 6/6 [==============================] - 5s 882ms/step - loss: 5.3717e-05 - accuracy: 1.0000 - val_loss: 10.8638 - val_accuracy: 0.0652 Epoch 41/100 6/6 [==============================] - 5s 798ms/step - loss: 5.1974e-05 - accuracy: 1.0000 - val_loss: 10.8792 - val_accuracy: 0.0652 Epoch 42/100 6/6 [==============================] - 6s 1s/step - loss: 4.9936e-05 - accuracy: 1.0000 - val_loss: 10.8925 - val_accuracy: 0.0652 Epoch 43/100 6/6 [==============================] - 5s 848ms/step - loss: 4.8110e-05 - accuracy: 1.0000 - val_loss: 10.9035 - val_accuracy: 0.0652 Epoch 44/100 6/6 [==============================] - 5s 871ms/step - loss: 4.6562e-05 - accuracy: 1.0000 - val_loss: 10.9157 - val_accuracy: 0.0652 Epoch 45/100 6/6 [==============================] - 7s 1s/step - loss: 4.4760e-05 - accuracy: 1.0000 - val_loss: 10.9268 - val_accuracy: 0.0652 Epoch 46/100 6/6 [==============================] - 6s 1s/step - loss: 4.3376e-05 - accuracy: 1.0000 - val_loss: 10.9359 - val_accuracy: 0.0652 Epoch 47/100 6/6 [==============================] - 5s 786ms/step - loss: 4.1922e-05 - accuracy: 1.0000 - val_loss: 10.9461 - val_accuracy: 0.0652 Epoch 48/100 6/6 [==============================] - 5s 770ms/step - loss: 4.0665e-05 - accuracy: 1.0000 - val_loss: 10.9568 - val_accuracy: 0.0652 Epoch 49/100 6/6 [==============================] - 5s 970ms/step - loss: 3.9349e-05 - accuracy: 1.0000 - val_loss: 10.9674 - val_accuracy: 0.0652 Epoch 50/100 6/6 [==============================] - 4s 717ms/step - loss: 3.8422e-05 - accuracy: 1.0000 - val_loss: 10.9806 - val_accuracy: 0.0652 Epoch 51/100 6/6 [==============================] - 5s 842ms/step - loss: 3.7213e-05 - accuracy: 1.0000 - val_loss: 10.9925 - val_accuracy: 0.0652 Epoch 52/100 6/6 [==============================] - 5s 830ms/step - loss: 3.6017e-05 - accuracy: 1.0000 - val_loss: 11.0017 - val_accuracy: 0.0652 Epoch 53/100 6/6 [==============================] - 5s 798ms/step - loss: 3.5097e-05 - accuracy: 1.0000 - val_loss: 11.0107 - val_accuracy: 0.0652 Epoch 54/100 6/6 [==============================] - 5s 791ms/step - loss: 3.4071e-05 - accuracy: 1.0000 - val_loss: 11.0211 - val_accuracy: 0.0652 Epoch 55/100 6/6 [==============================] - 5s 789ms/step - loss: 3.3137e-05 - accuracy: 1.0000 - val_loss: 11.0312 - val_accuracy: 0.0652 Epoch 56/100 6/6 [==============================] - 5s 781ms/step - loss: 3.2218e-05 - accuracy: 1.0000 - val_loss: 11.0378 - val_accuracy: 0.0652 Epoch 57/100 6/6 [==============================] - 5s 829ms/step - loss: 3.1417e-05 - accuracy: 1.0000 - val_loss: 11.0470 - val_accuracy: 0.0652 Epoch 58/100 6/6 [==============================] - 7s 1s/step - loss: 3.0649e-05 - accuracy: 1.0000 - val_loss: 11.0573 - val_accuracy: 0.0652 Epoch 59/100 6/6 [==============================] - 6s 942ms/step - loss: 2.9817e-05 - accuracy: 1.0000 - val_loss: 11.0654 - val_accuracy: 0.0652 Epoch 60/100 6/6 [==============================] - 5s 787ms/step - loss: 2.9164e-05 - accuracy: 1.0000 - val_loss: 11.0756 - val_accuracy: 0.0652 Epoch 61/100 6/6 [==============================] - 5s 919ms/step - loss: 2.8411e-05 - accuracy: 1.0000 - val_loss: 11.0855 - val_accuracy: 0.0652 Epoch 62/100 6/6 [==============================] - 6s 931ms/step - loss: 2.7772e-05 - accuracy: 1.0000 - val_loss: 11.0938 - val_accuracy: 0.0652 Epoch 63/100 6/6 [==============================] - 5s 790ms/step - loss: 2.7037e-05 - accuracy: 1.0000 - val_loss: 11.1018 - val_accuracy: 0.0652 Epoch 64/100 6/6 [==============================] - 4s 727ms/step - loss: 2.6436e-05 - accuracy: 1.0000 - val_loss: 11.1087 - val_accuracy: 0.0652 Epoch 65/100 6/6 [==============================] - 4s 704ms/step - loss: 2.5894e-05 - accuracy: 1.0000 - val_loss: 11.1167 - val_accuracy: 0.0652 Epoch 66/100 6/6 [==============================] - 5s 780ms/step - loss: 2.5258e-05 - accuracy: 1.0000 - val_loss: 11.1239 - val_accuracy: 0.0652 Epoch 67/100 6/6 [==============================] - 5s 753ms/step - loss: 2.4751e-05 - accuracy: 1.0000 - val_loss: 11.1326 - val_accuracy: 0.0652 Epoch 68/100 6/6 [==============================] - 5s 773ms/step - loss: 2.4191e-05 - accuracy: 1.0000 - val_loss: 11.1411 - val_accuracy: 0.0652 Epoch 69/100 6/6 [==============================] - 4s 739ms/step - loss: 2.3672e-05 - accuracy: 1.0000 - val_loss: 11.1495 - val_accuracy: 0.0652 Epoch 70/100 6/6 [==============================] - 4s 725ms/step - loss: 2.3168e-05 - accuracy: 1.0000 - val_loss: 11.1561 - val_accuracy: 0.0652 Epoch 71/100 6/6 [==============================] - 5s 791ms/step - loss: 2.2704e-05 - accuracy: 1.0000 - val_loss: 11.1644 - val_accuracy: 0.0652 Epoch 72/100 6/6 [==============================] - 4s 663ms/step - loss: 2.2221e-05 - accuracy: 1.0000 - val_loss: 11.1712 - val_accuracy: 0.0652 Epoch 73/100 6/6 [==============================] - 4s 648ms/step - loss: 2.1765e-05 - accuracy: 1.0000 - val_loss: 11.1784 - val_accuracy: 0.0652 Epoch 74/100 6/6 [==============================] - 4s 664ms/step - loss: 2.1369e-05 - accuracy: 1.0000 - val_loss: 11.1870 - val_accuracy: 0.0652 Epoch 75/100 6/6 [==============================] - 4s 686ms/step - loss: 2.0982e-05 - accuracy: 1.0000 - val_loss: 11.1962 - val_accuracy: 0.0652 Epoch 76/100 6/6 [==============================] - 4s 707ms/step - loss: 2.0544e-05 - accuracy: 1.0000 - val_loss: 11.2038 - val_accuracy: 0.0652 Epoch 77/100 6/6 [==============================] - 4s 725ms/step - loss: 2.0097e-05 - accuracy: 1.0000 - val_loss: 11.2096 - val_accuracy: 0.0652 Epoch 78/100 6/6 [==============================] - 5s 820ms/step - loss: 1.9732e-05 - accuracy: 1.0000 - val_loss: 11.2160 - val_accuracy: 0.0652 Epoch 79/100 6/6 [==============================] - 5s 923ms/step - loss: 1.9368e-05 - accuracy: 1.0000 - val_loss: 11.2237 - val_accuracy: 0.0652 Epoch 80/100 6/6 [==============================] - 5s 799ms/step - loss: 1.9013e-05 - accuracy: 1.0000 - val_loss: 11.2306 - val_accuracy: 0.0652 Epoch 81/100 6/6 [==============================] - 5s 759ms/step - loss: 1.8672e-05 - accuracy: 1.0000 - val_loss: 11.2388 - val_accuracy: 0.0652 Epoch 82/100 6/6 [==============================] - 5s 818ms/step - loss: 1.8323e-05 - accuracy: 1.0000 - val_loss: 11.2461 - val_accuracy: 0.0652 Epoch 83/100 6/6 [==============================] - 4s 740ms/step - loss: 1.7994e-05 - accuracy: 1.0000 - val_loss: 11.2529 - val_accuracy: 0.0652 Epoch 84/100 6/6 [==============================] - 4s 690ms/step - loss: 1.7682e-05 - accuracy: 1.0000 - val_loss: 11.2590 - val_accuracy: 0.0652 Epoch 85/100 6/6 [==============================] - 4s 666ms/step - loss: 1.7376e-05 - accuracy: 1.0000 - val_loss: 11.2665 - val_accuracy: 0.0652 Epoch 86/100 6/6 [==============================] - 4s 725ms/step - loss: 1.7066e-05 - accuracy: 1.0000 - val_loss: 11.2730 - val_accuracy: 0.0652 Epoch 87/100 6/6 [==============================] - 4s 690ms/step - loss: 1.6743e-05 - accuracy: 1.0000 - val_loss: 11.2780 - val_accuracy: 0.0652 Epoch 88/100 6/6 [==============================] - 4s 735ms/step - loss: 1.6459e-05 - accuracy: 1.0000 - val_loss: 11.2847 - val_accuracy: 0.0652 Epoch 89/100 6/6 [==============================] - 4s 696ms/step - loss: 1.6186e-05 - accuracy: 1.0000 - val_loss: 11.2908 - val_accuracy: 0.0652 Epoch 90/100 6/6 [==============================] - 5s 792ms/step - loss: 1.5968e-05 - accuracy: 1.0000 - val_loss: 11.2985 - val_accuracy: 0.0652 Epoch 91/100 6/6 [==============================] - 4s 760ms/step - loss: 1.5643e-05 - accuracy: 1.0000 - val_loss: 11.3056 - val_accuracy: 0.0652 Epoch 92/100 6/6 [==============================] - 4s 756ms/step - loss: 1.5380e-05 - accuracy: 1.0000 - val_loss: 11.3112 - val_accuracy: 0.0652 Epoch 93/100 6/6 [==============================] - 4s 727ms/step - loss: 1.5141e-05 - accuracy: 1.0000 - val_loss: 11.3178 - val_accuracy: 0.0652 Epoch 94/100 6/6 [==============================] - 4s 689ms/step - loss: 1.4902e-05 - accuracy: 1.0000 - val_loss: 11.3247 - val_accuracy: 0.0652 Epoch 95/100 6/6 [==============================] - 4s 707ms/step - loss: 1.4646e-05 - accuracy: 1.0000 - val_loss: 11.3319 - val_accuracy: 0.0652 Epoch 96/100 6/6 [==============================] - 5s 798ms/step - loss: 1.4395e-05 - accuracy: 1.0000 - val_loss: 11.3380 - val_accuracy: 0.0652 Epoch 97/100 6/6 [==============================] - 4s 734ms/step - loss: 1.4159e-05 - accuracy: 1.0000 - val_loss: 11.3430 - val_accuracy: 0.0652 Epoch 98/100 6/6 [==============================] - 5s 781ms/step - loss: 1.3937e-05 - accuracy: 1.0000 - val_loss: 11.3476 - val_accuracy: 0.0652 Epoch 99/100 6/6 [==============================] - 5s 846ms/step - loss: 1.3708e-05 - accuracy: 1.0000 - val_loss: 11.3521 - val_accuracy: 0.0652 Epoch 100/100 6/6 [==============================] - 6s 994ms/step - loss: 1.3516e-05 - accuracy: 1.0000 - val_loss: 11.3587 - val_accuracy: 0.0652
In [25]:
model.save('./models/genshin.HDF5')
INFO:tensorflow:Assets written to: ./models/genshin.HDF5\assets
INFO:tensorflow:Assets written to: ./models/genshin.HDF5\assets
In [26]:
# 훈련 결과 시각화
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(epochs)
plt.figure(figsize=(8, 8))
plt.subplot(1, 2, 1)
plt.plot(epochs_range, acc, label='Training Accuracy')
plt.plot(epochs_range, val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(1, 2, 2)
plt.plot(epochs_range, loss, label='Training Loss')
plt.plot(epochs_range, val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.show()
In [24]:
PIL.Image.open('./test/venti.jpg')
Out[24]:
In [31]:
# 모델 테스트
char_path = './test/venti.jpg'
img = tf.keras.utils.load_img(
char_path, target_size=(img_height, img_width)
)
img_array = tf.keras.utils.img_to_array(img)
img_array = tf.expand_dims(img_array, 0) # 배치 생성
predictions = model.predict(img_array)
score = tf.nn.softmax(predictions[0])
print(
"이 캐릭터는 [ {} ] 같습니다. 추정 확률은 {:.2f} % 입니다."
.format(class_names[np.argmax(score)], 100 * np.max(score))
)
1/1 [==============================] - 0s 109ms/step 이 캐릭터는 [ Eula ] 같습니다. 추정 확률은 26.56 % 입니다.
In [28]:
PIL.Image.open('./test/kokomi.jpg')
Out[28]:
In [30]:
# 모델 테스트
char_path = './test/kokomi.jpg'
img = tf.keras.utils.load_img(
char_path, target_size=(img_height, img_width)
)
img_array = tf.keras.utils.img_to_array(img)
img_array = tf.expand_dims(img_array, 0) # 배치 생성
predictions = model.predict(img_array)
score = tf.nn.softmax(predictions[0])
print(
"이 캐릭터는 [ {} ] 같습니다. 추정 확률은 {:.2f} % 입니다."
.format(class_names[np.argmax(score)], 100 * np.max(score))
)
1/1 [==============================] - 0s 265ms/step 이 캐릭터는 [ Lumine ] 같습니다. 추정 확률은 91.63 % 입니다.
In [ ]:
# 데이터 추가 필요 ( 데이터 부족이 원인같음 )
In [32]:
import pathlib
data_dir = 'C:\\Users\\starl\\Downloads\\class\\data3\\'
data_dir = pathlib.Path(data_dir).with_suffix('')
In [33]:
image_count = len(list(data_dir.glob('*/*.jpg')))
print(image_count)
# 데이터 사본의 총 개수
400
In [36]:
import random
import os
hu_tao = list(data_dir.glob('Hu Tao/*'))
PIL.Image.open(str(hu_tao[random.randint(1, len(os.listdir('./data3/Hu Tao')))]))
# 랜덤 이미지 추출
Out[36]:
In [38]:
import random
import os
kokomi = list(data_dir.glob('kokomi/*'))
PIL.Image.open(str(kokomi[random.randint(1, len(os.listdir('./data3/kokomi')))]))
# 랜덤 이미지 추출
Out[38]:
In [39]:
# 데이터셋 생성 ( 매개변수 지정 )
batch_size = 32
img_height = 180
img_width = 180
train_ds = tf.keras.utils.image_dataset_from_directory(
data_dir,
validation_split=0.2,
subset="training",
seed=123,
image_size=(img_height, img_width),
batch_size=batch_size)
Found 400 files belonging to 4 classes. Using 320 files for training.
In [40]:
val_ds = tf.keras.utils.image_dataset_from_directory(
data_dir,
validation_split=0.2,
subset="validation",
seed=123,
image_size=(img_height, img_width),
batch_size=batch_size)
Found 400 files belonging to 4 classes. Using 80 files for validation.
In [41]:
class_names = train_ds.class_names
print(class_names)
['Albedo', 'Ayaka', 'Hu Tao', 'Kokomi']
In [46]:
import matplotlib.pyplot as plt
plt.figure(figsize=(10, 10))
for images, labels in train_ds.take(1):
for i in range(9):
ax = plt.subplot(5, 10, i + 1)
plt.imshow(images[i].numpy().astype("uint8"))
plt.title(class_names[labels[i]])
plt.axis("off")
# 훈련 데이터세트 출력
In [47]:
for image_batch, labels_batch in train_ds:
print(image_batch.shape)
print(labels_batch.shape)
break
# image_batch는 (32, 180, 180, 3) 형상의 텐서
(32, 180, 180, 3) (32,)
In [48]:
# 데이터세트 구성
AUTOTUNE = tf.data.AUTOTUNE
train_ds = train_ds.cache().shuffle(1000).prefetch(buffer_size=AUTOTUNE)
val_ds = val_ds.cache().prefetch(buffer_size=AUTOTUNE)
In [49]:
# tf.keras.layers.Rescaling을 사용하여 값을 [0, 1] 범위로 표준화
normalization_layer = layers.Rescaling(1./255)
In [50]:
normalized_ds = train_ds.map(lambda x, y: (normalization_layer(x), y))
image_batch, labels_batch = next(iter(normalized_ds))
first_image = image_batch[0]
print(np.min(first_image), np.max(first_image))
# Dataset.map을 호출하여 데이터세트에 이를 적용
0.0 1.0
In [51]:
# 모델 생성
num_classes = len(class_names)
model = Sequential([
layers.Rescaling(1./255, input_shape=(img_height, img_width, 3)),
layers.Conv2D(16, 3, padding='same', activation='relu'),
layers.MaxPooling2D(),
layers.Conv2D(32, 3, padding='same', activation='relu'),
layers.MaxPooling2D(),
layers.Conv2D(64, 3, padding='same', activation='relu'),
layers.MaxPooling2D(),
layers.Flatten(),
layers.Dense(128, activation='relu'),
layers.Dense(num_classes)
])
# relu 활성화 함수를 통해서 레이어 생성
In [52]:
model.compile(optimizer='adam',
loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
metrics=['accuracy'])
# 아담 옵티마이저와 tf.keras.losses.SparseCategoricalCrossentropy 손실 함수를 선택
In [53]:
model.summary()
# 네트워크의 모든 레이어 호출( df['total_bill'].info() 같은 메서드 )
Model: "sequential_1"
_________________________________________________________________
Layer (type) Output Shape Param #
=================================================================
rescaling_3 (Rescaling) (None, 180, 180, 3) 0
conv2d_3 (Conv2D) (None, 180, 180, 16) 448
max_pooling2d_3 (MaxPoolin (None, 90, 90, 16) 0
g2D)
conv2d_4 (Conv2D) (None, 90, 90, 32) 4640
max_pooling2d_4 (MaxPoolin (None, 45, 45, 32) 0
g2D)
conv2d_5 (Conv2D) (None, 45, 45, 64) 18496
max_pooling2d_5 (MaxPoolin (None, 22, 22, 64) 0
g2D)
flatten_1 (Flatten) (None, 30976) 0
dense_2 (Dense) (None, 128) 3965056
dense_3 (Dense) (None, 4) 516
=================================================================
Total params: 3989156 (15.22 MB)
Trainable params: 3989156 (15.22 MB)
Non-trainable params: 0 (0.00 Byte)
_________________________________________________________________
In [54]:
# 100에포크 동안 모델 학습
epochs=100
history = model.fit(
train_ds,
validation_data=val_ds,
epochs=epochs
)
Epoch 1/100 10/10 [==============================] - 10s 887ms/step - loss: 2.1312 - accuracy: 0.3531 - val_loss: 1.3565 - val_accuracy: 0.3625 Epoch 2/100 10/10 [==============================] - 8s 778ms/step - loss: 1.2047 - accuracy: 0.4219 - val_loss: 1.2171 - val_accuracy: 0.4500 Epoch 3/100 10/10 [==============================] - 8s 766ms/step - loss: 0.9867 - accuracy: 0.5281 - val_loss: 0.9715 - val_accuracy: 0.5125 Epoch 4/100 10/10 [==============================] - 8s 770ms/step - loss: 0.7383 - accuracy: 0.6875 - val_loss: 0.8079 - val_accuracy: 0.6500 Epoch 5/100 10/10 [==============================] - 9s 871ms/step - loss: 0.5383 - accuracy: 0.8000 - val_loss: 0.6835 - val_accuracy: 0.7375 Epoch 6/100 10/10 [==============================] - 8s 755ms/step - loss: 0.4184 - accuracy: 0.8594 - val_loss: 0.7421 - val_accuracy: 0.7250 Epoch 7/100 10/10 [==============================] - 10s 1s/step - loss: 0.2911 - accuracy: 0.9250 - val_loss: 0.8096 - val_accuracy: 0.6875 Epoch 8/100 10/10 [==============================] - 8s 811ms/step - loss: 0.2162 - accuracy: 0.9375 - val_loss: 0.7465 - val_accuracy: 0.7500 Epoch 9/100 10/10 [==============================] - 9s 958ms/step - loss: 0.1628 - accuracy: 0.9438 - val_loss: 0.8327 - val_accuracy: 0.7500 Epoch 10/100 10/10 [==============================] - 8s 844ms/step - loss: 0.1079 - accuracy: 0.9719 - val_loss: 0.8495 - val_accuracy: 0.7750 Epoch 11/100 10/10 [==============================] - 8s 840ms/step - loss: 0.0769 - accuracy: 0.9875 - val_loss: 0.5986 - val_accuracy: 0.7750 Epoch 12/100 10/10 [==============================] - 9s 906ms/step - loss: 0.0620 - accuracy: 0.9812 - val_loss: 0.6603 - val_accuracy: 0.7875 Epoch 13/100 10/10 [==============================] - 11s 1s/step - loss: 0.0609 - accuracy: 0.9781 - val_loss: 0.6658 - val_accuracy: 0.7750 Epoch 14/100 10/10 [==============================] - 8s 829ms/step - loss: 0.0870 - accuracy: 0.9719 - val_loss: 0.9394 - val_accuracy: 0.7250 Epoch 15/100 10/10 [==============================] - 7s 729ms/step - loss: 0.0934 - accuracy: 0.9812 - val_loss: 0.5432 - val_accuracy: 0.8000 Epoch 16/100 10/10 [==============================] - 12s 1s/step - loss: 0.0401 - accuracy: 0.9906 - val_loss: 0.6761 - val_accuracy: 0.7875 Epoch 17/100 10/10 [==============================] - 9s 873ms/step - loss: 0.0449 - accuracy: 0.9875 - val_loss: 0.7281 - val_accuracy: 0.7625 Epoch 18/100 10/10 [==============================] - 10s 976ms/step - loss: 0.0301 - accuracy: 0.9969 - val_loss: 0.7653 - val_accuracy: 0.7875 Epoch 19/100 10/10 [==============================] - 10s 1s/step - loss: 0.0123 - accuracy: 0.9969 - val_loss: 0.8545 - val_accuracy: 0.7750 Epoch 20/100 10/10 [==============================] - 9s 863ms/step - loss: 0.0054 - accuracy: 1.0000 - val_loss: 1.0139 - val_accuracy: 0.8125 Epoch 21/100 10/10 [==============================] - 10s 1s/step - loss: 0.0035 - accuracy: 1.0000 - val_loss: 0.8483 - val_accuracy: 0.8000 Epoch 22/100 10/10 [==============================] - 9s 886ms/step - loss: 0.0021 - accuracy: 1.0000 - val_loss: 0.9052 - val_accuracy: 0.8125 Epoch 23/100 10/10 [==============================] - 7s 683ms/step - loss: 0.0017 - accuracy: 1.0000 - val_loss: 0.9049 - val_accuracy: 0.8000 Epoch 24/100 10/10 [==============================] - 7s 678ms/step - loss: 0.0013 - accuracy: 1.0000 - val_loss: 0.9681 - val_accuracy: 0.8000 Epoch 25/100 10/10 [==============================] - 7s 683ms/step - loss: 0.0011 - accuracy: 1.0000 - val_loss: 0.9836 - val_accuracy: 0.8000 Epoch 26/100 10/10 [==============================] - 7s 683ms/step - loss: 9.5899e-04 - accuracy: 1.0000 - val_loss: 0.9885 - val_accuracy: 0.8000 Epoch 27/100 10/10 [==============================] - 7s 667ms/step - loss: 8.5743e-04 - accuracy: 1.0000 - val_loss: 1.0054 - val_accuracy: 0.8000 Epoch 28/100 10/10 [==============================] - 7s 670ms/step - loss: 7.7261e-04 - accuracy: 1.0000 - val_loss: 1.0174 - val_accuracy: 0.8125 Epoch 29/100 10/10 [==============================] - 7s 687ms/step - loss: 7.0321e-04 - accuracy: 1.0000 - val_loss: 1.0368 - val_accuracy: 0.8000 Epoch 30/100 10/10 [==============================] - 7s 680ms/step - loss: 6.4278e-04 - accuracy: 1.0000 - val_loss: 1.0408 - val_accuracy: 0.8125 Epoch 31/100 10/10 [==============================] - 7s 679ms/step - loss: 5.9647e-04 - accuracy: 1.0000 - val_loss: 1.0584 - val_accuracy: 0.8000 Epoch 32/100 10/10 [==============================] - 7s 670ms/step - loss: 5.5382e-04 - accuracy: 1.0000 - val_loss: 1.0733 - val_accuracy: 0.8125 Epoch 33/100 10/10 [==============================] - 7s 715ms/step - loss: 5.1674e-04 - accuracy: 1.0000 - val_loss: 1.0793 - val_accuracy: 0.8125 Epoch 34/100 10/10 [==============================] - 7s 685ms/step - loss: 4.9771e-04 - accuracy: 1.0000 - val_loss: 1.0786 - val_accuracy: 0.8125 Epoch 35/100 10/10 [==============================] - 8s 770ms/step - loss: 4.4487e-04 - accuracy: 1.0000 - val_loss: 1.0977 - val_accuracy: 0.8000 Epoch 36/100 10/10 [==============================] - 7s 705ms/step - loss: 4.1632e-04 - accuracy: 1.0000 - val_loss: 1.1119 - val_accuracy: 0.8125 Epoch 37/100 10/10 [==============================] - 7s 753ms/step - loss: 3.9250e-04 - accuracy: 1.0000 - val_loss: 1.1244 - val_accuracy: 0.8250 Epoch 38/100 10/10 [==============================] - 8s 773ms/step - loss: 3.6459e-04 - accuracy: 1.0000 - val_loss: 1.1309 - val_accuracy: 0.8000 Epoch 39/100 10/10 [==============================] - 7s 710ms/step - loss: 3.4460e-04 - accuracy: 1.0000 - val_loss: 1.1454 - val_accuracy: 0.8250 Epoch 40/100 10/10 [==============================] - 7s 735ms/step - loss: 3.2113e-04 - accuracy: 1.0000 - val_loss: 1.1484 - val_accuracy: 0.8000 Epoch 41/100 10/10 [==============================] - 7s 700ms/step - loss: 3.0817e-04 - accuracy: 1.0000 - val_loss: 1.1519 - val_accuracy: 0.8250 Epoch 42/100 10/10 [==============================] - 7s 684ms/step - loss: 2.9383e-04 - accuracy: 1.0000 - val_loss: 1.1778 - val_accuracy: 0.8000 Epoch 43/100 10/10 [==============================] - 7s 690ms/step - loss: 2.7234e-04 - accuracy: 1.0000 - val_loss: 1.1823 - val_accuracy: 0.8250 Epoch 44/100 10/10 [==============================] - 7s 682ms/step - loss: 2.6219e-04 - accuracy: 1.0000 - val_loss: 1.1951 - val_accuracy: 0.8000 Epoch 45/100 10/10 [==============================] - 7s 688ms/step - loss: 2.4925e-04 - accuracy: 1.0000 - val_loss: 1.1941 - val_accuracy: 0.8125 Epoch 46/100 10/10 [==============================] - 7s 679ms/step - loss: 2.3201e-04 - accuracy: 1.0000 - val_loss: 1.2163 - val_accuracy: 0.8000 Epoch 47/100 10/10 [==============================] - 7s 677ms/step - loss: 2.2107e-04 - accuracy: 1.0000 - val_loss: 1.2114 - val_accuracy: 0.8125 Epoch 48/100 10/10 [==============================] - 7s 681ms/step - loss: 2.1418e-04 - accuracy: 1.0000 - val_loss: 1.2288 - val_accuracy: 0.8000 Epoch 49/100 10/10 [==============================] - 7s 681ms/step - loss: 2.0060e-04 - accuracy: 1.0000 - val_loss: 1.2399 - val_accuracy: 0.8125 Epoch 50/100 10/10 [==============================] - 7s 683ms/step - loss: 1.9251e-04 - accuracy: 1.0000 - val_loss: 1.2572 - val_accuracy: 0.8125 Epoch 51/100 10/10 [==============================] - 7s 684ms/step - loss: 1.8769e-04 - accuracy: 1.0000 - val_loss: 1.2375 - val_accuracy: 0.8125 Epoch 52/100 10/10 [==============================] - 7s 673ms/step - loss: 1.8086e-04 - accuracy: 1.0000 - val_loss: 1.2541 - val_accuracy: 0.8000 Epoch 53/100 10/10 [==============================] - 7s 682ms/step - loss: 1.6912e-04 - accuracy: 1.0000 - val_loss: 1.2670 - val_accuracy: 0.8125 Epoch 54/100 10/10 [==============================] - 7s 679ms/step - loss: 1.6195e-04 - accuracy: 1.0000 - val_loss: 1.2861 - val_accuracy: 0.8125 Epoch 55/100 10/10 [==============================] - 7s 673ms/step - loss: 1.5139e-04 - accuracy: 1.0000 - val_loss: 1.2966 - val_accuracy: 0.8125 Epoch 56/100 10/10 [==============================] - 7s 664ms/step - loss: 1.4818e-04 - accuracy: 1.0000 - val_loss: 1.2996 - val_accuracy: 0.8125 Epoch 57/100 10/10 [==============================] - 7s 666ms/step - loss: 1.4365e-04 - accuracy: 1.0000 - val_loss: 1.2896 - val_accuracy: 0.8125 Epoch 58/100 10/10 [==============================] - 7s 666ms/step - loss: 1.4148e-04 - accuracy: 1.0000 - val_loss: 1.3195 - val_accuracy: 0.8000 Epoch 59/100 10/10 [==============================] - 7s 697ms/step - loss: 1.3122e-04 - accuracy: 1.0000 - val_loss: 1.3092 - val_accuracy: 0.8125 Epoch 60/100 10/10 [==============================] - 7s 668ms/step - loss: 1.2192e-04 - accuracy: 1.0000 - val_loss: 1.3234 - val_accuracy: 0.8125 Epoch 61/100 10/10 [==============================] - 7s 667ms/step - loss: 1.1763e-04 - accuracy: 1.0000 - val_loss: 1.3320 - val_accuracy: 0.8125 Epoch 62/100 10/10 [==============================] - 7s 686ms/step - loss: 1.1307e-04 - accuracy: 1.0000 - val_loss: 1.3431 - val_accuracy: 0.8125 Epoch 63/100 10/10 [==============================] - 7s 665ms/step - loss: 1.1102e-04 - accuracy: 1.0000 - val_loss: 1.3443 - val_accuracy: 0.8125 Epoch 64/100 10/10 [==============================] - 7s 665ms/step - loss: 1.0566e-04 - accuracy: 1.0000 - val_loss: 1.3433 - val_accuracy: 0.8125 Epoch 65/100 10/10 [==============================] - 7s 664ms/step - loss: 1.0153e-04 - accuracy: 1.0000 - val_loss: 1.3594 - val_accuracy: 0.8125 Epoch 66/100 10/10 [==============================] - 7s 679ms/step - loss: 9.8235e-05 - accuracy: 1.0000 - val_loss: 1.3534 - val_accuracy: 0.8125 Epoch 67/100 10/10 [==============================] - 7s 668ms/step - loss: 9.3993e-05 - accuracy: 1.0000 - val_loss: 1.3632 - val_accuracy: 0.8125 Epoch 68/100 10/10 [==============================] - 7s 667ms/step - loss: 9.2462e-05 - accuracy: 1.0000 - val_loss: 1.3800 - val_accuracy: 0.8125 Epoch 69/100 10/10 [==============================] - 7s 671ms/step - loss: 8.9840e-05 - accuracy: 1.0000 - val_loss: 1.3784 - val_accuracy: 0.7875 Epoch 70/100 10/10 [==============================] - 7s 670ms/step - loss: 8.6830e-05 - accuracy: 1.0000 - val_loss: 1.3884 - val_accuracy: 0.8125 Epoch 71/100 10/10 [==============================] - 7s 664ms/step - loss: 8.2839e-05 - accuracy: 1.0000 - val_loss: 1.3868 - val_accuracy: 0.8125 Epoch 72/100 10/10 [==============================] - 7s 665ms/step - loss: 7.9219e-05 - accuracy: 1.0000 - val_loss: 1.3919 - val_accuracy: 0.8125 Epoch 73/100 10/10 [==============================] - 7s 676ms/step - loss: 7.6702e-05 - accuracy: 1.0000 - val_loss: 1.3965 - val_accuracy: 0.8125 Epoch 74/100 10/10 [==============================] - 7s 669ms/step - loss: 7.4049e-05 - accuracy: 1.0000 - val_loss: 1.4053 - val_accuracy: 0.8125 Epoch 75/100 10/10 [==============================] - 7s 669ms/step - loss: 7.2392e-05 - accuracy: 1.0000 - val_loss: 1.4039 - val_accuracy: 0.8125 Epoch 76/100 10/10 [==============================] - 7s 668ms/step - loss: 7.1185e-05 - accuracy: 1.0000 - val_loss: 1.4180 - val_accuracy: 0.8125 Epoch 77/100 10/10 [==============================] - 7s 674ms/step - loss: 7.1389e-05 - accuracy: 1.0000 - val_loss: 1.4133 - val_accuracy: 0.7875 Epoch 78/100 10/10 [==============================] - 7s 662ms/step - loss: 6.8864e-05 - accuracy: 1.0000 - val_loss: 1.4345 - val_accuracy: 0.8125 Epoch 79/100 10/10 [==============================] - 7s 676ms/step - loss: 6.4980e-05 - accuracy: 1.0000 - val_loss: 1.4264 - val_accuracy: 0.8125 Epoch 80/100 10/10 [==============================] - 7s 670ms/step - loss: 6.3597e-05 - accuracy: 1.0000 - val_loss: 1.4327 - val_accuracy: 0.8125 Epoch 81/100 10/10 [==============================] - 7s 670ms/step - loss: 6.0305e-05 - accuracy: 1.0000 - val_loss: 1.4371 - val_accuracy: 0.8000 Epoch 82/100 10/10 [==============================] - 7s 674ms/step - loss: 5.8236e-05 - accuracy: 1.0000 - val_loss: 1.4549 - val_accuracy: 0.8125 Epoch 83/100 10/10 [==============================] - 7s 665ms/step - loss: 5.6330e-05 - accuracy: 1.0000 - val_loss: 1.4480 - val_accuracy: 0.8000 Epoch 84/100 10/10 [==============================] - 7s 668ms/step - loss: 5.4761e-05 - accuracy: 1.0000 - val_loss: 1.4597 - val_accuracy: 0.8125 Epoch 85/100 10/10 [==============================] - 7s 667ms/step - loss: 5.3698e-05 - accuracy: 1.0000 - val_loss: 1.4605 - val_accuracy: 0.8125 Epoch 86/100 10/10 [==============================] - 7s 685ms/step - loss: 5.3100e-05 - accuracy: 1.0000 - val_loss: 1.4673 - val_accuracy: 0.8000 Epoch 87/100 10/10 [==============================] - 7s 671ms/step - loss: 5.2338e-05 - accuracy: 1.0000 - val_loss: 1.4642 - val_accuracy: 0.8000 Epoch 88/100 10/10 [==============================] - 7s 668ms/step - loss: 5.2763e-05 - accuracy: 1.0000 - val_loss: 1.4803 - val_accuracy: 0.8125 Epoch 89/100 10/10 [==============================] - 7s 664ms/step - loss: 5.2411e-05 - accuracy: 1.0000 - val_loss: 1.4695 - val_accuracy: 0.7875 Epoch 90/100 10/10 [==============================] - 7s 670ms/step - loss: 5.1189e-05 - accuracy: 1.0000 - val_loss: 1.4891 - val_accuracy: 0.8125 Epoch 91/100 10/10 [==============================] - 7s 666ms/step - loss: 4.6897e-05 - accuracy: 1.0000 - val_loss: 1.4881 - val_accuracy: 0.8125 Epoch 92/100 10/10 [==============================] - 7s 674ms/step - loss: 4.6245e-05 - accuracy: 1.0000 - val_loss: 1.4922 - val_accuracy: 0.7875 Epoch 93/100 10/10 [==============================] - 7s 665ms/step - loss: 4.4929e-05 - accuracy: 1.0000 - val_loss: 1.5004 - val_accuracy: 0.8125 Epoch 94/100 10/10 [==============================] - 7s 666ms/step - loss: 4.2745e-05 - accuracy: 1.0000 - val_loss: 1.5092 - val_accuracy: 0.8000 Epoch 95/100 10/10 [==============================] - 7s 672ms/step - loss: 4.1048e-05 - accuracy: 1.0000 - val_loss: 1.5077 - val_accuracy: 0.8000 Epoch 96/100 10/10 [==============================] - 7s 671ms/step - loss: 3.9749e-05 - accuracy: 1.0000 - val_loss: 1.5135 - val_accuracy: 0.8000 Epoch 97/100 10/10 [==============================] - 7s 665ms/step - loss: 3.8849e-05 - accuracy: 1.0000 - val_loss: 1.5202 - val_accuracy: 0.8000 Epoch 98/100 10/10 [==============================] - 7s 666ms/step - loss: 3.7774e-05 - accuracy: 1.0000 - val_loss: 1.5234 - val_accuracy: 0.8000 Epoch 99/100 10/10 [==============================] - 7s 681ms/step - loss: 3.6858e-05 - accuracy: 1.0000 - val_loss: 1.5293 - val_accuracy: 0.8000 Epoch 100/100 10/10 [==============================] - 7s 673ms/step - loss: 3.6018e-05 - accuracy: 1.0000 - val_loss: 1.5304 - val_accuracy: 0.8000
In [55]:
model.save('./models/genshin2.HDF5')
INFO:tensorflow:Assets written to: ./models/genshin2.HDF5\assets
INFO:tensorflow:Assets written to: ./models/genshin2.HDF5\assets
In [56]:
# 훈련 결과 시각화
acc = history.history['accuracy']
val_acc = history.history['val_accuracy']
loss = history.history['loss']
val_loss = history.history['val_loss']
epochs_range = range(epochs)
plt.figure(figsize=(8, 8))
plt.subplot(1, 2, 1)
plt.plot(epochs_range, acc, label='Training Accuracy')
plt.plot(epochs_range, val_acc, label='Validation Accuracy')
plt.legend(loc='lower right')
plt.title('Training and Validation Accuracy')
plt.subplot(1, 2, 2)
plt.plot(epochs_range, loss, label='Training Loss')
plt.plot(epochs_range, val_loss, label='Validation Loss')
plt.legend(loc='upper right')
plt.title('Training and Validation Loss')
plt.show()
In [57]:
PIL.Image.open('./test/kokomi.jpg')
Out[57]:
In [58]:
# 모델 테스트
char_path = './test/kokomi.jpg'
img = tf.keras.utils.load_img(
char_path, target_size=(img_height, img_width)
)
img_array = tf.keras.utils.img_to_array(img)
img_array = tf.expand_dims(img_array, 0) # 배치 생성
predictions = model.predict(img_array)
score = tf.nn.softmax(predictions[0])
print(
"이 캐릭터는 [ {} ] 같습니다. 추정 확률은 {:.2f} % 입니다."
.format(class_names[np.argmax(score)], 100 * np.max(score))
)
1/1 [==============================] - 0s 130ms/step 이 캐릭터는 [ Kokomi ] 같습니다. 추정 확률은 100.00 % 입니다.
In [59]:
PIL.Image.open('./test/albedo.jpg')
Out[59]:
In [60]:
# 모델 테스트
char_path = './test/albedo.jpg'
img = tf.keras.utils.load_img(
char_path, target_size=(img_height, img_width)
)
img_array = tf.keras.utils.img_to_array(img)
img_array = tf.expand_dims(img_array, 0) # 배치 생성
predictions = model.predict(img_array)
score = tf.nn.softmax(predictions[0])
print(
"이 캐릭터는 [ {} ] 같습니다. 추정 확률은 {:.2f} % 입니다."
.format(class_names[np.argmax(score)], 100 * np.max(score))
)
1/1 [==============================] - 0s 34ms/step 이 캐릭터는 [ Albedo ] 같습니다. 추정 확률은 99.64 % 입니다.
In [61]:
PIL.Image.open('./test/ayaka.jpg')
Out[61]:
In [62]:
# 모델 테스트
char_path = './test/ayaka.jpg'
img = tf.keras.utils.load_img(
char_path, target_size=(img_height, img_width)
)
img_array = tf.keras.utils.img_to_array(img)
img_array = tf.expand_dims(img_array, 0) # 배치 생성
predictions = model.predict(img_array)
score = tf.nn.softmax(predictions[0])
print(
"이 캐릭터는 [ {} ] 같습니다. 추정 확률은 {:.2f} % 입니다."
.format(class_names[np.argmax(score)], 100 * np.max(score))
)
1/1 [==============================] - 0s 72ms/step 이 캐릭터는 [ Ayaka ] 같습니다. 추정 확률은 100.00 % 입니다.
In [67]:
PIL.Image.open('./test/hutao2.jpg')
Out[67]:
In [68]:
# 모델 테스트
char_path = './test/hutao2.jpg'
img = tf.keras.utils.load_img(
char_path, target_size=(img_height, img_width)
)
img_array = tf.keras.utils.img_to_array(img)
img_array = tf.expand_dims(img_array, 0) # 배치 생성
predictions = model.predict(img_array)
score = tf.nn.softmax(predictions[0])
print(
"이 캐릭터는 [ {} ] 같습니다. 추정 확률은 {:.2f} % 입니다."
.format(class_names[np.argmax(score)], 100 * np.max(score))
)
1/1 [==============================] - 0s 41ms/step 이 캐릭터는 [ Hu Tao ] 같습니다. 추정 확률은 100.00 % 입니다.